home
***
CD-ROM
|
disk
|
FTP
|
other
***
search
/
C/C++ Interactive Reference Guide
/
C-C++ Interactive Reference Guide.iso
/
c_ref
/
csource5
/
357_01
/
cstar1.exe
/
G2.C
< prev
next >
Wrap
C/C++ Source or Header
|
1991-06-18
|
76KB
|
3,143 lines
/*
C* -- Code generation -- Expression routines.
source: g2.c
started: January 21, 1986
version:
February 20, 1987
March 7, 1989
PUBLIC DOMAIN SOFTWARE
The CSTAR program was placed in the public domain on June 15, 1991,
by its author and sole owner,
Edward K. Ream
1617 Monroe Street
Madison, WI 53711
(608) 257-0802
CSTAR may be used for any commercial or non-commercial purpose.
See cstar.h or cstar.c for a DISCLAIMER OF WARRANTIES.
*/
#include "cstar.h"
/*
NOTE:
added argument on resolve(), sop_assnop, get_?temp, and
get_temp calls is eventually to be set to R_?? or -R_??
to serve as a hint to use or to avoid R_?? as a temporary.
*/
/*
CAUTION:
Gotchas involving replacement operators and (areg)+ address modes.
The form
(*p++) += 3;
is meaningful and is probably valid C. However, it creates an
implicit LHS temporary involving p without mentioning p twice.
The form
(*p++) = *p + 3;
is undefined as to its effect and we need not worry too much
about it, although it probably ought to do the side effect
exactly once.
In coding replacement operators (ASSN_OP), the following
considerations apply when a has_effect() mode (areg)+ or -(areg)
is an operand:
- The operator probably ought to do the side effect and not
return a node that still contains it. If OPT_NONEED is in
effect it certainly ought to do it.
- The operator had better not do the side effect and return a
node that still contains it.
- The operator had better not do the side effect more than once.
- The operator had better discard the node once a POST side effect
is done, since the node then no longer represents a good lvalue.
*/
/*
As much as possible, create and alter loc_nodes through
locn_chmod(), locn_reg(), etc. to facilitate future efforts
to limit proliferation by management of a set of commonly used
nodes. WARNING: the manager will have to take care of types on
the s_stack, and this may severely reduce its utility.
To modify a node, call locn_xdupl() so that a copy will be
made, e.g. if it is attached to something. If it is known that
a copy is needed, call locn_dupl.
To perform additive combination, call x_addpsi(); see x_addpsi()
for calling conditions!
*/
/*
CAUTION:
The evaluation of a general expression involves alternating calls
to gen_e1 and gen_b1 by each other. When gen_e1 runs out of
registers, it pushes a register onto the stack, thus raising
ss_bot. Other (unpushed) registers appear as entries lying between
ss_bot and ss_count. The assumption that NO TEMPORARY register
is duplicated between ANY two of these latter entries in any way
is fairly wired into the code. For example, a composite
loc_node is often added up and loaded into a temporary, and that
temporary is usually drawn from the loc_node itself if that is
possible. Such an operation is clearly nonsense if an additional
reference to that same temporary exists elsewhere in the ss_bot-to-
ss_count frame. When an operand is discarded, free_temp() frees
the temporaries that appear in it; same comment. The boolean
and ternary operators need to make the same assumption in order
to locate a place to save the flags or put the result (respectively)
in the event that stack restoration occurs.
A set of sufficient conditions guaranteeing the truth of this
assumption in practice is:
1. get_temp()/get_dtemp()/get_atemp() does not return any register
contained within that frame. This is true by the very intent
of get_temp. (Get_temp may return a register that appears
below ss_bot that was pushed without combination, but that
doesn't matter, because those registers lie outside the frame.)
2. operators that push without combination (see the exit portion
of gen_b1(), and sop_ternop()) clean up their own mess. This
is the intent of the ss_restore calls at those operators.
3. any operator that pushes more than one item onto the s_stack
pushes no duplicate temporaries. If only one item is pushed
onto the stack, and if the temporaries in that item are
derived from DISCARDED operands or from get_temp(), then no
duplication can occur. If operands are examined, and then
merely repushed, no duplication will be created that wasn't
already present. I.e. some effort will be required in
order to create a problem.
*/
/*
WARNING:
d5 = i + j; generates
dt = i;
dt += j;
d5 = dt;
rather than
d5 = i;
d5 += j;
since there is as yet no mechanism for passing a lhs temporary,
nor is there a peephole for removing a construction of this sort.
It may be that the peephole would be simpler.
*/
/*
NOTE: whether to disturb the upper portions of declared registers
or not is now a compile-time flag. The choice has to do with
whether
mem = (long) d5w;
should be cast in d5 or cast elsewhere. Probably in those cases
where it is critical that it be done elsewhere (e.g. an impending
X_SWAP) the programmer should use pseudo-ops. The more rigorous
option is also available, but note that it often generates
terrible code.
*/
/*
Externally visible routines defined in this file:
*/
void gen_bool (struct node *p,
struct node *true_lab, struct node *false_lab);
void gen_expr (struct node *p);
void gen_a0exp (struct node *p);
void gen_d0exp (struct node *p);
struct node * gen_dexpr (struct node *p);
void gen_bpost (struct node *p);
/* Forward declarations of static functions. */
#define g_bra(p) g_1lab(X_BRA, p);
static void gen_b1 (int must_pop, struct node *p,
struct node *true_lab, struct node *false_lab);
static struct diop *
ops_bcc (struct node *p,
int utcode, int tcode, int ufcode, int fcode,
int xutcode, int xtcode, int xufcode, int xfcode);
static struct diop *
ops_xne (struct node *p);
static void i_gene1 (void);
static void gen_e1 (struct node *p);
static unsigned long
gen_args (register struct node *arg, int top);
static void gen_indir (struct node *p);
static struct node *
no_effect (struct node *p);
static unsigned long
aoffs (unsigned int size, unsigned int *shift);
void g_2call (struct node *loc1, struct node *loc2,
char *string);
static void sop_add (struct node *p, int op);
static void sop_adr (struct node *p);
static void sop_arrow (struct node *p);
static void sop_assnop (struct node *p, int op, int hint);
static void sop_call (struct node *p);
static void sop_cast (struct node *p);
static int sop_cmp (struct node *p);
static void sop_comma (struct node *p);
static void sop_dbinop (struct node *p, int op);
static void sop_dot (struct node *p);
static void sop_logical (struct node *p, int op);
static void sop_not (struct node *p);
static void sop_relop (struct node *p, int op);
static void sop_sep (struct node *p);
static void sop_ternop (struct node *p);
static void sop_unop (struct node *p, int op);
static void sop_ustar (struct node *p);
static void sop_ppop (struct node *p);
/*
Define pointer to the Local Code List (LCL) for arithmetic expressions.
*/
static struct code_node * lcl_head;
static struct code_node * lcl_tail;
/*
Access to register parameter, for unsigned extend in casts and
in pointer addition
*/
extern int na_free;
extern int nd_free;
extern int ss_count, ss_forks, ss_bot;
/*
E N T R Y P O I N T R O U T I N E S
All access to this module are via these routines:
gen_bpost(); generate post operators of a boolean expression.
gen_bool(); generate code for a boolean expression and branch
to either of two labels, depending on result.
gen_expr(); generate code for an outer arithmetic expression.
gen_a0exp(); generate code for an arithmetic expression and leave
the result in a0_loc or d0_loc.
gen_dexpr(); generate code for an arithmetic expression and leave
the result as a value in a register.
ALL of these directly call
i_gene1()
ALL of these except gen_bool() call
gen_pp()
*/
/*
ENTRY POINT ROUTINE
Generate code for a boolean expression.
*/
void
gen_bool(struct node *p, struct node *true_lab, struct node *false_lab)
{
	TRACEPB("gen_bool", printf("(%p, %p, %p)\n",
		p, true_lab, false_lab));

	/* Reset the local code list and register state (Phase 1), then
	   emit the boolean with branches to the two labels.  The first
	   argument (must_pop) starts at 0: we are not yet under a branch
	   point of an enclosing boolean. */
	i_gene1(); /* Phase 1 */
	TRACEP("gen_bool", printf("\n"); pr_expr(p); printf("\n"););
	gen_b1(0, p, true_lab, false_lab);
	TICKX("gen_bool");
}
/*
ENTRY POINT ROUTINE
Generate code for an outer arithmetic expression.
Nothing is left on the s_stack.
*/
void
gen_expr(struct node *p)
{
	TRACEPB("gen_expr",
		printf("(%p)\n", p);
		pr_expr(p); printf("\n"));

	/* A NULL expression (e.g. an empty statement) generates nothing. */
	if (p == NULL) {
		RETURN_VOID("gen_expr");
	}
	gen_pp(p); /* Phase 0 */
	TRACEP("gen_expr", printf("\n"); pr_expr(p); printf("\n"););
	i_gene1(); /* Phase 1 */
	gen_e1(p);
	/* The result is discarded: x_lookat() consumes the loc_node left
	   on the s_stack, so nothing remains on the stack afterwards. */
	x_lookat(ss_pop());
	TICKX("gen_expr");
}
/*
ENTRY POINT ROUTINE
Generate code for an outer expression returning a0/d0
This is used for function returns.
*/
void
gen_a0exp(struct node *p)
{
	struct node *loc1;

	TRACEPB("gen_a0exp", printf("(%p)\n", p));

	/* NOTE(review): unlike gen_expr(), p is not checked for NULL here;
	   presumably callers (function-return code) guarantee a non-NULL
	   expression -- confirm at the call sites. */
	gen_pp(p); /* Phase 0 */
	i_gene1(); /* Phase 1 */
	gen_e1(p);
	loc1 = resolve(ss_pop());
	/* Copy the result into a0 only if it is not already there. */
	if (!is_equiv(loc1, a0_loc)) {
		a0_loc -> n_cltype = p -> n_cltype;
		g_2l1(X_MOVE, loc1, a0_loc);
	}
	TICKX("gen_a0exp");
}
/*
	Generate code for an outer expression and leave its value in d0.
	Companion of gen_a0exp(); used for function returns.
*/
void
gen_d0exp(struct node *p)
{
	struct node *result;

	TRACEPB("gen_d0exp", printf("(%p)\n", p));

	gen_pp(p); /* Phase 0 */
	i_gene1(); /* Phase 1 */
	gen_e1(p);
	result = resolve(ss_pop());

	/* Move the value into d0 unless it already lives there. */
	if (!is_equiv(result, d0_loc)) {
		d0_loc -> n_cltype = p -> n_cltype;
		g_qmove(result, d0_loc);
	}
	TICKX("gen_d0exp");
}
/*
ENTRY POINT ROUTINE
Generate code for an outer expression returning a value in
a D register regardless of type. The D register may or may
not be a temporary.
This is used by the switch statement in order to evaluate
its argument.
*/
struct node *
gen_dexpr(struct node *p)
{
	struct node *val, *tmp;

	TRACEPB("gen_dexpr", printf("(%p)\n", p));

	gen_pp(p); /* Phase 0 */
	i_gene1(); /* Phase 1 */
	gen_e1(p);
	val = resolve(ss_pop());

	/* If the value already sits in a D register, hand it back as-is. */
	if (is_dloc(val)) {
		TRACEP("gen_dexpr",
			printf("returns: ");
			pr_loc(val); printf("\n"));
		RETURN_PTR("gen_dexpr", val);
	}

	/* Otherwise copy it into a fresh D temporary and return that. */
	tmp = get_dtemp();
	g_qmove(val, tmp);
	tmp -> n_cltype = val -> n_cltype;
	TRACEP("gen_dexpr",
		printf("return: ");
		pr_loc(tmp); printf("\n"));
	RETURN_PTR("gen_dexpr", tmp);
}
/*
ENTRY POINT ROUTINE
*/
void
gen_bpost(struct node *p)
{
	/* Currently a no-op stub: only the trace entry/exit macros run
	   and no code is generated.  The entry-point comment earlier in
	   this file describes the intended job ("generate post operators
	   of a boolean expression"). */
	TRACEPB("gen_bpost", printf("(%p)\n", p));
	;
	TICKX("gen_bpost");
}
/*
CAUTION: NO calls to i_gene1() should appear after this point!
*/
/*
Generate code to evaluate a boolean expression and to
jump to true_lab if true and false_lab if false.
ONLY ONE of true_lab or false_lab should ever be FALL_THROUGH.
*/
/*
	Generate code to evaluate a boolean expression and to jump to
	true_lab if true and false_lab if false.
	ONLY ONE of true_lab or false_lab should ever be FALL_THROUGH.
	must_pop: 0 = not under a branch point; 1 = under one; 2 = in a
	right branch that may need flag save/restore around ss_restore.
*/
static void
gen_b1(int must_pop, struct node *p,
	register struct node *true_lab, register struct node *false_lab)
{
	register int op;
	register struct node *loc;
	struct node *imm_lab;
	register struct diop *ops;
	int ssb_save, sf_save;

	TRACEPB("gen_b1", printf("(%d, %p, %p, %p) ",
		must_pop, p, true_lab, false_lab));

	/*
		BUG FIX: the NULL test must come BEFORE the first use of p.
		The original read p -> n_type first, which is undefined
		behavior when p is NULL.
	*/
	if (p == NULL) {
		/* There is not enough information to generate a branch! */
		RETURN_VOID("gen_b1");
	}
	op = p -> n_type;
	TRACEP("gen_b1", printf("op: %s\n", ps_tok(op)));

	/* operators that recurse simply return immediately */
	switch (op) {

	case NOT_TOK:
		/* just exchange labels */
		gen_b1(must_pop, p -> n_arg1, false_lab, true_lab);
		RETURN_VOID("gen_b1");

	case LAND_TOK:
		/* logical and/or creates branch points */
		if (!must_pop) {
			must_pop = 1;
		}
		/* logical "and" fails directly to false_lab */
		TRACEP("gen_b1", printf("logical and\n"));
		if (false_lab == FALL_THROUGH) {
			/* fix an extra label so it falls through */
			imm_lab = new_clabel();
			gen_b1(must_pop, p -> n_arg1, FALL_THROUGH, imm_lab);
			/* if arg1 is true, fall through to arg2 */
			gen_b1(2, p -> n_arg2, true_lab, false_lab);
			g_label(imm_lab);
		}
		else {
			/* otherwise do what comes naturally */
			gen_b1(must_pop, p -> n_arg1, FALL_THROUGH, false_lab);
			gen_b1(2, p -> n_arg2, true_lab, false_lab);
		}
		RETURN_VOID("gen_b1");

	case LOR_TOK:
		if (!must_pop) {
			must_pop = 1;
		}
		/* logical "or" succeeds directly to true_lab */
		TRACEP("gen_b1", printf("logical or\n"));
		if (true_lab == FALL_THROUGH) {
			/* arrange an extra label so it happens */
			imm_lab = new_clabel();
			gen_b1(must_pop, p -> n_arg1, imm_lab, FALL_THROUGH);
			gen_b1(2, p -> n_arg2, true_lab, false_lab);
			g_label(imm_lab);
		}
		else {
			/* just do what comes naturally */
			gen_b1(must_pop, p -> n_arg1, true_lab, FALL_THROUGH);
			gen_b1(2, p -> n_arg2, true_lab, false_lab);
		}
		RETURN_VOID("gen_b1");
	}

	TRACEP("gen_b1", printf("arithmetical op: %s\n", ps_tok(op)););

	/* these generate arithmetical code */
	/*
		The following line gets us a place to put the flags if
		we need to save them while restoring registers, (or if
		we choose to save them during a post-op?); it should
		generate code only in the first test branch--which is always
		executed--and not anywhere else, so that the stack balance
		will be the same at the end of all branches.
		This pseudo-reservation of a temp is only needed if we lie
		under a branch point in the current boolean, that is,
		must_pop > 0.
		As to how the reservation works, see the note under sop_ternop
	*/
	if (must_pop && ss_count > ss_bot) {
		free_temp(loc = get_dtemp());
	}
	/* and we only need to actually save and restore in the right
	   branches, since the leftmost branch gets generated first
	   and is always executed */
	if (must_pop >= 2) {
		ssb_save = ss_bot;
		sf_save = ss_forks;
		ss_forks = ss_count; /* pre-existing items save-and-restore */
	}

	switch (op) {
	/*
		the operators form quasi-rings (no identity),
		with true/false transformation and operand exchange
		transformation as the two operations.
		actually, it's exactly an order-2 Rubik's cube!
		the true/false alternatives are mutually exclusive
		and exactly cover the comparison possibilities
		(which are of an underlying ternary nature); they
		represent what happens if you exchange true_lab
		with false_lab
		the exchange alternatives represent what happens
		if you exchange the operands of the relop. note that
		expressions transformed by changing just the relop
		in this way are not mutually exclusive-- a <= b
		is not mutually exclusive with a >= b. this
		transformation implements the conversion from a <= b
		to b >= a --or a == b to b == a.
	*/
	case EQUAL_TOK:
		ops = ops_bcc(p,
			X_BEQ, X_BEQ, X_BNE, X_BNE,	/* <-> true/false */
			X_BEQ, X_BEQ, X_BNE, X_BNE);	/* exchange */
		break;

	case NE_TOK:
		ops = ops_bcc(p,
			X_BNE, X_BNE, X_BEQ, X_BEQ,
			X_BNE, X_BNE, X_BEQ, X_BEQ);
		break;

	case LT_TOK:
		/* toks unsigned, signed */
		ops = ops_bcc(p,
			X_BLO, X_BLT, X_BHS, X_BGE,
			X_BHI, X_BGT, X_BLS, X_BLE);
		break;

	case GT_TOK:
		ops = ops_bcc(p,
			X_BHI, X_BGT, X_BLS, X_BLE,	/* <-> true/false */
			X_BLO, X_BLT, X_BHS, X_BGE);	/* exchange */
		break;

	case LE_TOK:
		ops = ops_bcc(p,
			X_BLS, X_BLE, X_BHI, X_BGT,
			X_BHS, X_BGE, X_BLO, X_BLT);
		break;

	case GE_TOK:
		ops = ops_bcc(p,
			X_BHS, X_BGE, X_BLO, X_BLT,
			X_BLS, X_BLE, X_BHI, X_BGT);
		break;

	default:
		/* Arithmetic op or ID_TOK */
		ops = ops_xne(p);
	}

	/* WARNING:
		this saves flags even if the popped items generate no
		code...
	*/
	if (must_pop >= 2) {
		if (ss_bot > ssb_save) {
			/* NOTE(review): loc is assigned above only when
			   ss_count > ss_bot held on entry; it appears
			   ss_bot cannot rise past ssb_save otherwise --
			   confirm, else loc is used uninitialized here. */
			g_2(X_MOVE, sr_loc, loc);
			do {
				(void) ss_restore();
			}
			while (ss_bot > ssb_save);
			g_2(X_MOVE, loc, ccr_loc);
		}
		ss_forks = sf_save;
	}

	/* generate the actual branch */
	if (true_lab != FALL_THROUGH) {
		g_1lab(ops -> o_true, true_lab);
	}
	if (false_lab != FALL_THROUGH) {
		g_1lab(ops -> o_false, false_lab);
	}
	TICKX("gen_b1");
}
/*
Evaluate comparison followed by a branch.
This code is really a piece of gen_b1 and shouldn't be used
elsewhere.
*/
/*
	Evaluate comparison followed by a branch.
	This code is really a piece of gen_b1 and shouldn't be used
	elsewhere.
	The eight arguments are the branch codes: {unsigned,signed} x
	{true,false} x {as-is,operands-exchanged}.
*/
static struct diop *
ops_bcc(struct node *p,
	int utcode, int tcode, int ufcode, int fcode,
	int xutcode, int xtcode, int xufcode, int xfcode)
{
	register struct type_node *t;
	int swapped;
	static struct diop ops;

	TRACEPB("ops_bcc", printf("(%p, %d, %d, %d, %d, %d, %d, %d, %d)\n",
		p, utcode, tcode, ufcode, fcode, xutcode, xtcode, xufcode, xfcode));

	/*
		Non-int operands (e.g. pointers) and unsigned ints take the
		unsigned branch codes; everything else takes the signed ones.
		sop_cmp() emits the comparison and returns nonzero when it
		had to exchange the operands, in which case the exchanged
		variants of the codes apply.  It must run exactly once.
	*/
	t = p -> n_arg1 -> n_cltype;
	if (t -> t_typtok != INT_TYPE || (t -> t_mclass & UNSIGNED_MOD)) {
		swapped = sop_cmp(p);
		ops . o_true = swapped ? xutcode : utcode;
		ops . o_false = swapped ? xufcode : ufcode;
	}
	else {
		swapped = sop_cmp(p);
		ops . o_true = swapped ? xtcode : tcode;
		ops . o_false = swapped ? xfcode : fcode;
	}

	TRACEP("ops_bcc",
		printf("returns <%s, %s>\n",
		xzp_tab[ops.o_true], xzp_tab[ops.o_false]));
	RETURN_PTR("ops_bcc", &ops);
}
/*
Test for nonzero.
Generate code for p. If the result is not a constant,
generate a test instruction on the result, or otherwise
generate a flag for sure, and return a
BNE/BEQ pair for the ensuing code generation to use.
Note that arithmetical operators may or may not set the flags,
and some operators call functions.
If the result is a constant, return a suitable BRA/BRN
pair for the ensuing code generation to use.
The idea is to present the peephole with unconditional branches
as much as possible.
*/
static struct diop *
ops_xne(struct node *p)
{
	register struct node *loc1, *loc2;
	extern struct diop ops_true;
	extern struct diop ops_false;
	extern struct diop ops_cond;

	/* Bug fix: loc nodes do not have an n_oflags field !!!! */
	TRACEPB("ops_xne", printf("(%p)\n", p));

	if (p -> n_type == ID_TOK) {
		/* Bare identifier: just evaluate it; the explicit test
		   instruction is emitted below. */
		gen_e1(p);
		loc1 = ss_pop();
	}
	else {
		/* Ask the subexpression to set the Z flag itself if it
		   can; if it reports success (OPT_ZFLAG), no test
		   instruction is needed. */
		p -> n_oflags |= OPT_ZNEED;
		gen_e1(p); /* resolved and r-stack balanced */
		loc1 = ss_pop();
		if (p -> n_oflags & OPT_ZFLAG) {
			/* code already generated that sets the flag for sure */
			/* temps have been freed */
			RETURN_PTR("ops_xne", &ops_cond);
		}
	}
	if (is_cloc(loc1)) {
		/* Constant condition: warn the user and return an
		   unconditional pair so later passes can prune the
		   dead branch. */
		g_help(p, "constant used as conditional");
		RETURN_PTR("ops_xne", (loc1 -> n_const)? &ops_true : &ops_false);
	}
	else {
		if (is_aloc(loc1)) {
			/* TST does not accept an address register here;
			   either copy through a D temp (the move sets the
			   flags) or compare against zero. */
			if (nd_free) {
				loc2 = get_dtemp();
				g_2l1(X_MOVE, loc1, loc2);
				/* move An, An won't set flags */
				free_temp(loc2);
			}
			else {
				g_2l2(X_CMPA, zero_loc, loc1);
			}
		}
		else {
			g_1l(X_TST, loc1);
		}
		free_temp(loc1);
	}
	RETURN_PTR("ops_xne", &ops_cond);
}
/*
Initialize this phase by clearing the LCL and S stack.
*/
/*
	Initialize this phase: empty the Local Code List and reset the
	register/S-stack state.
*/
static void
i_gene1(void)
{
	TICKB("i_gene1");

	/* Reset the local code list to empty. */
	lcl_head = lcl_tail = NULL;

	/* Release every register and set up the s_stack. */
	free_all();

	/* Save the global pointers to the code list. */
	/* WARNING: not ready yet. */
	TICKX("i_gene1");
}
/*
Return a no-effect node corresponding to a has_effect node
*/
/*
	Return a no-effect node corresponding to a has_effect node.
	If loc carries a side-effecting address mode ((areg)+ / -(areg)),
	return a duplicate with a plain EA_MODE; otherwise return loc
	itself unchanged.
*/
static struct node *
no_effect(register struct node *loc)
{
	TRACEPB("no_effect", printf("(%p)\n", loc));
	if (has_effect(loc -> n_mode)) {
		loc = locn_dupl(loc); /* NOT locn_xdupl!!! */
		loc -> n_mode = EA_MODE;
	}
	/*
		BUG FIX: the original returned loc only in the
		non-has_effect branch and fell off the end of this
		non-void function otherwise (undefined behavior).
		Return loc on every path.
	*/
	RETURN_PTR("no_effect", loc);
}
/*
Generate code from a parse tree.
*/
/*
	Generate code from a parse tree.
	Dispatches on the node's operator: "special forms" evaluate their
	own arguments; ordinary operators have their arguments evaluated
	here (second argument first, so non-commutative operators stack
	correctly) before the matching sop_xxx() routine runs.
*/
static void
gen_e1(register struct node *p)
{
	/* CLEANUP: the original declared else_lab, end_lab, loc1 and i,
	   none of which were used; "register op" also relied on
	   implicit int. */
	register int op;

	TRACEPB("gen_e1",
		printf("(%p)\n", p);
		pr_expr(p); printf("\n"));

	op = p -> n_type;
	switch (op) {

	case ID_TOK:
		/* A leaf: just push it for the caller to consume. */
		ss_push(p);
		RETURN_VOID("gen_e1");

	/* S P E C I A L   F O R M S. */
	case CALL_TOK:
		/* Special form. */
		sop_call(p);
		RETURN_VOID("gen_e1");

	case COMMA_TOK:
		/* Special form. */
		sop_comma(p);
		RETURN_VOID("gen_e1");

	case SEPARATOR_TOK:
		/* Special form. */
		sop_sep(p);
		RETURN_VOID("gen_e1");

	case COLON_TOK:
	case QUESTION_TOK:
		/* Special form. */
		sop_ternop(p);
		RETURN_VOID("gen_e1");

	case NOT_TOK:
		/* Special form. */
		sop_not(p);
		RETURN_VOID("gen_e1");

	case UAND_TOK:
		/* Special form. */
		sop_adr(p);
		RETURN_VOID("gen_e1");

	case PLUS_TOK:
	case MINUS_TOK:
		/* Special form. */
		sop_add(p, op);
		RETURN_VOID("gen_e1");

	case LAND_TOK:
	case LOR_TOK:
		/* Special form. */
		sop_logical(p, op);
		RETURN_VOID("gen_e1");

	/* N O N - O R D I N A R Y   O P E R A T O R S. */
	case USTAR_TOK:
		/* Special form. */
		sop_ustar(p);
		RETURN_VOID("gen_e1");

	case DOT_TOK:
		gen_e1(p -> n_arg2);
		gen_e1(p -> n_arg1);
		sop_dot(p);
		RETURN_VOID("gen_e1");

	case ARROW_TOK:
		gen_e1(p -> n_arg2);
		gen_e1(p -> n_arg1);
		sop_arrow(p);
		RETURN_VOID("gen_e1");

	case CAST_TOK:
		gen_e1(p -> n_arg1);
		sop_cast(p);
		RETURN_VOID("gen_e1");
	}

	/* O R D I N A R Y   O P E R A T O R S. */
	if (is_unop(op)) {
		gen_e1(p -> n_arg1);
		switch (op) {
		case PRE_INC_TOK:
		case PRE_DEC_TOK:
		case POST_INC_TOK:
		case POST_DEC_TOK:
			sop_ppop(p);
			break;
		default:
			sop_unop(p, op);
		}
	}
	else if (is_relop(op)) {
		gen_e1(p -> n_arg2);
		gen_e1(p -> n_arg1);
		sop_relop(p, op);
	}
	else if (is_assnop(op)) {
		/* special form */
		sop_assnop(p, op, 0);
	}
	else if (is_binop(op)) {
		/*
			this sequence has to be maintained in order that
			the stacking sequence work properly for non-
			commutative operators; e.g. the divisor (second
			argument) has to be pushed first so it will be
			properly available if it gets converted to a
			pop_loc. the stacking happens to work the same
			for operators that execute a JSR.
		*/
		gen_e1(p -> n_arg2);
		gen_e1(p -> n_arg1);
		sop_dbinop(p, op);
	}
	else {
		g_error(p, "gen_e1: internal: bad operator");
		printf("gen_e1: bad token %d %s\n", op, ps_tok(op));
	}
	TICKX("gen_e1");
}
/*
S T A C K M A C H I N E O P E R A T O R S.
All sop_xxx() routines take their operands on the S stack
and leave their results, generating machine code in the process.
All sop_xxx() routines are called with the parse node of the intended
result as a parameter. This is useful for doing type checking.
Most sop_xxx() routines assume that their operands have already been
evaluated by calling gen_e1(), but several "special forms" such as
the ternary operator, array references and function calls, must
evaluate their arguments in a non-standard manner.
*/
/*
Generate code to compute * of an expression.
Note that if result is of type array, nothing happens.
CAUTION: sop_ustar is a special form. it exists only to perform a
test and do the post++ combination if possible. if the
combination is not done, an extra temporary is generated
to hold the pre-++ value for later use. pre-ops do not
generate such a temporary, and we have not decided whether
they should be handled here or in peep_hole.
gen_indir is called by sop_arrow
gen_indir does not evaluate its argument, which is
used only in order to attach the type (and might
be used in a call to g_error)
*/
static void
sop_ustar(register struct node *p)
{
	register unsigned long scale;
	register struct node *q;
	register struct node *locp;

	TRACEPB("sop_ustar", printf("(%p)\n", p));
	switch (p -> n_arg1 -> n_type) {

	case POST_INC_TOK:
	case PRE_DEC_TOK:
		/* *p++ or *--p: try to fold the increment/decrement into
		   an (areg)+ or -(areg) address mode. */
		TRACEP("sop_ustar", printf("try special ++/--\n"));
		q = p -> n_arg1 -> n_arg1;
		if (q -> n_cltype -> t_link) {
			/* element size the ++/-- steps by */
			scale = q -> n_cltype -> t_link -> t_tsize;
		}
		else {
			/* this ought not to happen */
			g_error(p, "sop_ustar: internal: defective pointer type");
			goto normal;
		}
		/*
			NOTE: a similar optimization could occasionally arise
			with ->. However, the following test would still have
			to be done, and it would rule out all but certain
			rather pathological cases. We do not recommend
			adding such code to -> since it would, e.g. not
			shorten C-Star by so much as one byte.
		*/
		/* The auto-inc/dec step must equal the accessed object's
		   size and be a hardware-supported step (1, 2 or 4). */
		if (scale != p -> n_cltype -> t_tsize ||
			(scale != 1 && scale != 2 && scale != 4) ) {
			TRACEP("sop_ustar",
				printf("cancel special: scale = %ld\n", scale));
			goto normal;
		}
		/* generate the location on which the --/++ operates */
		gen_e1(q);
		locp = ss_pop();
		if (is_aloc(locp) && !is_atloc(locp)) {
			/* Pointer is in a (non-temporary) A register:
			   rewrite it as the side-effecting mode. */
			TRACEP("sop_ustar", printf("use special mode\n"));
			locp = locn_chmod(locp,
				(p -> n_arg1 -> n_type == POST_INC_TOK)?
				EAPSI_MODE : EAPRD_MODE);
			locp -> n_cltype = p -> n_cltype;
			ss_push(locp);
		}
		else {
			TRACEP("sop_ustar", printf("cancel special: not an aloc\n"));
			/* do NOT fiddle with type of locp in this branch */
			ss_push(locp);
			sop_ppop(p -> n_arg1);
			gen_indir(p);
		}
		break;

	default:
	normal:
		/* Ordinary indirection: evaluate the pointer, then
		   dereference it. */
		gen_e1(p -> n_arg1);
		gen_indir(p);
	}
	RETURN_VOID("sop_ustar");
}
/*
	Turn the location on top of the s_stack into a dereferenced
	(EA_MODE) location of type p -> n_cltype, pushing the result.
	Called by sop_ustar() and sop_arrow(); does not evaluate its
	argument, which supplies only the result type.
*/
static void
gen_indir(struct node *p)
{
	register struct node *loc1, *loc3;
	/* CLEANUP: the original also declared "op", which was unused. */
	register int reg;

	TRACEPB("gen_indir", printf("(%p)\n", p));
	loc1 = loc3 = ss_pop();

	if (p -> n_cltype -> t_typtok == ARRAY_TYPE) {
		/* yet another weird property of arrays... */
		/* do nothing */
	}
	else if (loc3 -> n_mode == VALUE_MODE) {
		TRACEP("gen_indir", printf("change to EA_MODE\n"));
		loc3 = locn_chmod(loc3, EA_MODE);
		loc3 = resolve(loc3);
	}
	else {
		TRACEP("gen_indir", printf("already EA_MODE\n"));
		/* must load the ea and set the mode */
		loc1 = resolve(loc1);
		/* reuse an A register already present in loc1 if there is
		   one, otherwise grab a fresh A temporary */
		if ((reg = has_atreg(loc1)) != 0) {
			loc3 = locn_reg(reg);
		}
		else {
			loc3 = get_atemp();
		}
		g_2l1(X_MOVE, loc1, loc3);
		loc3 = locn_chmod(loc3, EA_MODE);
		free_xtemp(loc1, loc3);
	}
	loc3 -> n_cltype = p -> n_cltype;
	ss_push(loc3);
	TICKX("gen_indir");
}
/*
Generate code for assignment operators.
NOTE: this always returns a resolved loc_node, since it always
computes a value that gets put somewhere. That loc_node MAY or
MAY NOT be the actual lvalue location.
CAUTION: see note at file head about has_effect nodes!!!
CAUTION: except for addition and subtraction, these operators
are unprepared to accept an areg destination. An atemp destination
is of course impossible. An areg destination will be prohibited
by the typing mechanism unless areg ints are permitted.
*/
#define SPECIAL_L (has_effect(loc1 -> n_mode) && !(p -> n_oflags & OPT_NONEED))
static void
sop_assnop(struct node *p, int op, int hint)
{
register struct node *loc1, *loc2, *loc3, *loc4;
register int x_op, mod;
TRACEPB("sop_assnop",
printf("(%p, %d = %s, %d)\n", p, op, ps_tok(op), hint));
gen_e1(p -> n_arg2);
loc2 = resolve(ss_pop());
/* we know we need a real source value */
ss_push(loc2);
gen_e1(p -> n_arg1);
loc1 = resolve(ss_pop()); /* destination */
loc2 = ss_pop();
if (!is_storloc(loc1)) {
g_error(p, "sop_assnop: bad destination");
ss_push(loc1);
RETURN_VOID("sop_assnop");
}
/* A temporary CANNOT be a valid C-LANGUAGE lvalue!!! */
if (is_xtloc(loc1)) {
g_error(p, "sop_assnop: destination is temporary!");
}
switch(op) {
case ASSN_TOK:
/* Simple assignment operator, just a move */
/* The buck stops here, so it's not quite so simple */
/*
NOTE: A few further tests, e.g. for constant
source, are possible. Using a constant source
may or may not pay if a temporary is available.
Be aware of using no_effect() judiciously in
any added tests.
*/
if (p -> n_oflags & OPT_NONEED) {
TRACEP("sop_assnop", printf("simple move\n"));
g_qmove(loc2, loc1);
/* side effects of loc1 already done... */
/* don't let x_lookat do them again... */
ss_push(no_effect(loc1));
free_temp(loc2);
}
else {
/* first try to pass on a register lvalue */
if (is_xloc(loc1)) {
/* loc1 is of course not a temporary */
g_qmove(loc2, loc1);
ss_push(loc1);
free_temp(loc2);
}
else if (is_xloc(loc2)) {
g_2l2(X_MOVE, loc2, loc1);
ss_push(loc2);
loc2 -> n_cltype = p -> n_cltype;
free_temp(loc1);
}
/* otherwise pass thru a temporary if there is one */
else {
if(
((p->n_type == INT_TYPE)? nd_free : na_free)
||
has_effect(loc1 -> n_mode)
||
has_effect(loc2 -> n_mode)
) {
ss_push(loc2);
loc3 = get_temp(loc1);
loc2 = ss_pop();
g_qmove(loc2, loc3);
g_2l2(X_MOVE, loc3, loc1);
loc3 -> n_cltype = p -> n_cltype;
ss_push(loc3);
free_temp(loc1);
free_temp(loc2);
}
else {
g_qmove(loc2, loc1);
ss_push(loc1);
free_xtemp(loc2, loc1);
}
}
}
RETURN_VOID("sop_assnop");
case PLUS_ASSN_TOK:
x_op = X_ADD;
goto addop;
case MINUS_ASSN_TOK:
x_op = X_SUB;
addop:
if (p -> n_cltype -> t_typtok != INT_TYPE) {
goto ptrop;
}
TRACEP("sop_assnop", printf("integer add/subtract\n"));
/* loc1 is the destination */
/* these ops have parallel widths */
if (is_xloc(loc1)) {
/* SPECIAL_L cannot be true */
g_2l2(x_op, loc2, loc1);
}
else if ((is_dloc(loc2) || is_cloc(loc2) )) {
if (SPECIAL_L) {
g_2l2(x_op, loc2, no_effect(loc1));
}
else {
g_2l2(x_op, loc2, loc1);
}
}
else {
/* operation won't fly without temporary */
loc3 = get_dtemp();
loc3 -> n_cltype = p -> n_cltype;
g_2l1(X_MOVE, loc2, loc3);
if (SPECIAL_L) {
g_2l2(x_op, loc3, no_effect(loc1));
}
else {
g_2l2(x_op, loc3, loc1);
}
free_temp(loc3);
}
ss_push(loc1);
free_temp(loc2);
RETURN_VOID("sop_assnop");
/* pointer version */
ptrop:
/* CAUTION: these tests assume pointers are LONG */
TRACEP("sop_assnop", printf("pointer destination\n"));
loc2 = x_scale(loc2, loc1 -> n_cltype -> t_link -> t_tsize);
if (is_aloc(loc1)) {
}
else if (is_atloc(loc2) && x_op == X_ADD) {
/* cannot add areg to memory */
/* SHORTCUT */
/* ABELIAN */
g_2l1(X_ADD, no_effect(loc1), loc2);
g_2l2(X_MOVE, loc2, loc1);
ss_push(loc2);
free_temp(loc1);
RETURN_VOID("sop_assnop");
}
/* all entries now have !is_aloc(loc1) true, and must therefore
ensure ensure a long loc2 prior to the addition */
#if EXCESSLENOK
else if (is_dloc(loc2)) {
if (loc2 -> n_cltype == int_type) {
loc2 -> n_cltype = long_type;
g_1l(X_EXT, loc2);
}
}
#else
else if (is_dloc(loc2) && loc2 -> n_cltype == long_type) {
}
else if (is_dtloc(loc2) && loc2 -> n_cltype == int_type) {
/* must make it long */
loc2 -> n_cltype = long_type;
g_1l(X_EXT, loc2);
}
#endif
else if (is_cloc(loc2)) {
/* constant added to non-areg ptr must be long */
loc2 -> n_cltype = long_type;
}
else {
/* NOTE: an areg can't be added to an EA */
/* that's why we don't get an atemp */
ss_push(loc2);
loc3 = get_dtemp();
loc2 = ss_pop();
loc3 -> n_cltype = long_type;
g_2l1(X_MOVE, loc2, loc3);
if (loc2 -> n_cltype != long_type) {
g_1l(X_EXT, loc3);
}
if (SPECIAL_L) {
g_2l2(x_op, loc3, no_effect(loc1));
}
else {
g_2l2(x_op, loc3, loc1);
}
free_temp(loc2);
free_temp(loc3);
ss_push(loc1);
RETURN_VOID("sop_assnop");
}
/* perform the op by the type of loc2 (sic) */
if (SPECIAL_L) {
g_2l1(x_op, loc2, no_effect(loc1));
}
else {
g_2l1(x_op, loc2, loc1);
}
ss_push(loc1);
free_temp(loc2);
RETURN_VOID("sop_assnop");
case AND_ASSN_TOK:
x_op = X_AND;
goto genop;
case OR_ASSN_TOK:
x_op = X_OR;
genop:
/* general ops do not allow areg source or dest */
/* loc1 is the destination */
if (is_dloc(loc1)) {
/* SPECIAL_L is impossible */
g_2l2(x_op, loc2, loc1);
ss_push(loc1);
free_temp(loc2);
}
else if (!is_aloc(loc1) && (is_dloc(loc2) || is_cloc(loc2) )) {
if (SPECIAL_L) {
g_2l2(x_op, loc2, no_effect(loc1));
}
else {
g_2l2(x_op, loc2, loc1);
}
}
else {
loc3 = get_dtemp();
loc3 -> n_cltype = p -> n_cltype;
g_2l1(X_MOVE, loc2, loc3);
if (SPECIAL_L) {
g_2l2(x_op, loc3, no_effect(loc1));
}
else {
g_2l2(x_op, loc3, loc1);
}
free_temp(loc3);
}
free_temp(loc2);
ss_push(loc1);
RETURN_VOID("sop_assnop");
case STAR_ASSN_TOK:
/*
destination of STAR, MOD, and DIV always ends up in
a dreg, because of the hardware or the function call.
so we always return that dreg as the result, and
SPECIAL_L is never tested since the result is
always valid. we use no_effect to ensure that
the first of two accesses to the lvalue does not
cause side effects.
*/
mod = loc1 -> n_cltype -> t_mclass;
if (mod & UNSIGNED_MOD) {
x_op = X_MULU;
}
else {
x_op = X_MULS;
}
if (mod & LONG_MOD) {
/* SHORTCUT */
g_2call(no_effect(loc1), loc2,
(mod & UNSIGNED_MOD)? "lmulu" : "lmul");
loc3 = d0_loc;
loc3 -> n_cltype = p -> n_cltype;
g_2l1(X_MOVE, d0_loc, loc1);
ss_push(loc3); /* sic */
RETURN_VOID("sop_assnop");
}
/* loc1 is the destination */
#if EXCESSLENOK
if (is_dloc(loc1)) {
#else
if (is_dtloc(loc1)) {
#endif
g_2(x_op, loc2, loc1);
ss_push(loc1);
free_temp(loc2);
}
else if (is_dtloc(loc2)) {
g_2(x_op, no_effect(loc1), loc2);
g_2l2(X_MOVE, loc2, loc1);
ss_push(loc2);
free_temp(loc1);
}
else {
loc3 = get_dtemp();
loc3 -> n_cltype = p -> n_cltype;
g_2l1(X_MOVE, no_effect(loc1), loc3);
g_2(x_op, loc2, loc3);
g_2l1(X_MOVE, loc3, loc1);
ss_push(loc3);
free_temp(loc1);
free_temp(loc2);
}
RETURN_VOID("sop_assnop");
case DIV_ASSN_TOK:
mod = loc1 -> n_cltype -> t_mclass;
if (mod & UNSIGNED_MOD) {
x_op = X_DIVU;
}
else {
x_op = X_DIVS;
}
if (mod & LONG_MOD) {
/* SHORTCUT */
g_2call(no_effect(loc1), loc2,
(mod & UNSIGNED_MOD)? "ldivu" : "ldiv");
loc3 = d0_loc;
loc3 -> n_cltype = p -> n_cltype;
g_2l1(X_MOVE, d0_loc, loc1);
ss_push(loc3); /* sic */
RETURN_VOID("sop_assnop");
}
else if (mod & CHAR_MOD) {
if (!is_cloc(loc2)) {
loc2 = x_cast(loc2, 1, mod, 2, EXCESSLENOK);
}
}
/* loc1 is the intended destination */
/* x_cast always returns a dreg */
loc3 = x_cast(no_effect(loc1), mlen(loc1), mod, 4, EXCESSLENOK);
loc3 -> n_cltype = p -> n_cltype;
g_2(x_op, loc2, loc3);
if (!is_equiv(loc1, loc3)) {
/* since x_cast always returns a dreg, any has_effect()
version of loc1 cannot match loc3, so the side
effects always get done here: */
g_2l1(X_MOVE, loc3, loc1);
}
ss_push(loc3);
free_temp(loc1);
free_temp(loc2);
RETURN_VOID("sop_assnop");
case MOD_ASSN_TOK:
mod = loc1 -> n_cltype -> t_mclass;
if (mod & UNSIGNED_MOD) {
x_op = X_DIVU;
}
else {
x_op = X_DIVS;
}
if (mod & LONG_MOD) {
/* SHORTCUT */
g_2call(no_effect(loc1), loc2,
(mod & UNSIGNED_MOD)? "lremu" : "lrem");
loc3 = d0_loc;
loc3 -> n_cltype = p -> n_cltype;
g_2l1(X_MOVE, d0_loc, loc1);
ss_push(loc3); /* sic */
RETURN_VOID("sop_assnop");
}
else if (mod & CHAR_MOD) {
if (!is_cloc(loc2)) {
loc2 = x_cast(loc2, 1, mod, 2, EXCESSLENOK);
}
}
loc3 = x_cast(no_effect(loc1), mlen(loc1), mod, 4, EXCESSLENOK);
loc3 -> n_cltype = p -> n_cltype;
g_2(x_op, loc2, loc3);
g_1(X_SWAP, loc3);
if (!is_equiv(loc1, loc3)) {
g_2l1(X_MOVE, loc3, loc1);
}
ss_push(loc3);
free_temp(loc1);
free_temp(loc2);
RETURN_VOID("sop_assnop");
case LSHIFT_ASSN_TOK:
x_op = X_ASL;
goto shift;
case RSHIFT_ASSN_TOK:
if (loc1 -> n_cltype -> t_mclass & UNSIGNED_MOD) {
x_op = X_LSR;
}
else {
x_op = X_ASR;
}
shift:
/* loc1 is the destination */
if (is_cloc(loc2) && !SPECIAL_L && !loc2 -> n_cid &&
loc2 -> n_const >= 0 && loc2 -> n_const <= 8) {
if (loc2 -> n_const <= 1 && mlen(loc1) == 2) {
if (loc2 -> n_const) {
/* shift once */
g_1l(x_op, loc1);
}
/* else do nothing */
ss_push(loc1);
RETURN_VOID("sop_assnop");
}
loc4 = loc2;
}
else if (is_dloc(loc2)) {
loc4 = loc2;
}
else {
loc4 = get_dtemp();
g_2l1(X_MOVE, loc2, loc4);
free_temp(loc2);
}
/* operate on loc1 */
if (is_dloc(loc1)) {
g_2l2(x_op, loc4, loc1);
ss_push(loc1);
}
else {
loc3 = get_dtemp();
loc3 -> n_cltype = p -> n_cltype;
g_2l1(X_MOVE, no_effect(loc1), loc3);
g_2l2(x_op, loc4, loc3);
g_2l2(X_MOVE, loc3, loc1);
ss_push(loc3);
}
free_temp(loc4);
RETURN_VOID("sop_assnop");
case XOR_ASSN_TOK:
/* machine SOURCE must be dreg */
if (is_dloc(loc2)) {
g_2l2(X_EOR, loc2, no_effect(loc1));
}
else {
loc3 = get_dtemp();
loc3 -> n_cltype = p -> n_cltype;
g_2l1(X_MOVE, loc2, loc3);
g_2l2(X_EOR, loc3, no_effect(loc1));
free_temp(loc3);
}
/* result must be taken from loc1 */
ss_push(loc1);
free_xtemp(loc2, loc1);
RETURN_VOID("sop_assnop");
default:
g_error(NULL, "sop_assnop: shouldn't happen");
printf("sop_assnop: bad op is %d %s\n", op, ps_tok(op));
ss_push(loc1);
}
RETURN_VOID("sop_assnop");
}
/*
generate two-argument call for a d0 function
args are int or long size, see note in function
loc2 must be the FIRST argument to actually be generated
so that it will lie deeper on the s_stack, and therefore on
the physical stack; if loc1 is pop_loc, than loc2 had better
be pop_loc--but if loc1 is not pop_loc, loc2 may or may not be so
the function-call order (last-to-first) mimics the instruction
order insofar as our customary stack usage is concerned
*/
/*
Emit a call to a two-argument runtime-library routine whose result
comes back in D0 (e.g. lmul/ldiv/lrem helpers).
loc1, loc2: argument loc_nodes; loc2 must already have been
generated first so it lies deeper on the physical stack.
string: base name for the library entry label.
Side effects: spills D0 and the scratch registers, pushes the
arguments, emits the JSR, allocates D0 for the result, and
pops the argument bytes back off A7.
*/
void
g_2call(struct node *loc1, struct node *loc2, char *string)
{
register int c_1arg;
register struct node *lab;
/* push registers EXCEPT those containing loc1 and loc2 */
/* loc1 and loc2 will then be pushed last */
TRACEPB("g_2call", printf("(%p, %p, %s)\n",
loc1, loc2, string));
force_free(R_D0);
push_scratch();
/* the single-slot push optimization is only usable when nothing
is already showing on the s_stack bottom */
if (ss_bot) {
c_1arg = FALSE;
}
else {
c_1arg = call_1arg;
}
if (is_equiv(loc1, pop_loc)) {
if (is_equiv(loc2, pop_loc)) {
/* both arguments are already on the physical stack */
c_1arg = FALSE;
}
else {
/* loc1 stacked but loc2 not: caller violated the ordering
contract in the header comment */
fatal("g_2call: push out of sequence");
}
}
else {
if (is_equiv(loc2, pop_loc)) {
c_1arg = FALSE;
}
else {
g_2l1(X_MOVE, loc2, c_1arg? nopush_loc : push_loc);
}
g_2l1(X_MOVE, loc1, push_loc);
}
free_temp(loc2);
free_temp(loc1);
lab = new_culabel(string);
lab -> c_labnum = 0;
g_1lab(X_JSR, lab);
(void) alloc_reg(R_D0);
/* unstack the arguments: 4 bytes if one slot was pushed, else 8 */
loc1 = locn_xconst(c_1arg? 4L : 8L);
loc1 -> n_cltype = long_type;
g_2l1(X_ADDQ, loc1, a7_loc);
TICKX("g_2call");
}
/*
alter shift for byte access, and return a commensurate address offset
this is used for generating BTST type instructions
*/
/*
Alter *shift for byte access and return a commensurate address
offset; used when generating BTST-type instructions.
size: operand length in bytes.
*shift: on entry, a bit number within the whole operand; on exit,
the bit number within the addressed byte (0..7).
Returns the byte offset to add to the operand address so the
BTST lands on the byte containing the bit.
Reports an error (and clamps to offset 0) if the bit number lies
outside the operand.
*/
static unsigned long
aoffs(unsigned int size, unsigned int *shift)
{
register int o;
/* BUG FIX: trace the shift VALUE; passing the pointer itself for
%u is a printf format/argument mismatch (undefined behavior) */
TRACEPB("aoffs", printf("(%u, %u)\n", size, *shift));
o = (int) (size - (*shift >> 3) - 1);
if (o < 0) {
o = 0;
g_error(NULL, "shift constant too large");
}
*shift &= 7;
RETURN_ULONG("aoffs", (unsigned long)(unsigned) o);
}
/*
Generate code for ordinary non-assignment binary operator.
Direct pointer arguments, e.g. pointer + int, should not
appear, as they should have been taken care of by the caller.
The results, therefore, always arrive in a D register.
CAUTION: heed note at call point concerning evaluation order
for arguments. Also see note at g_2call().
*/
/*
Generate code for an ordinary (non-assignment) binary operator.
On entry the two operands are the top two s_stack entries:
loc1 is the left operand, loc2 the right (so non-abelian ops
compute loc1 OP loc2).  The result is pushed on the s_stack
and, per the header comment, always ends up in a D register.
Special cases (XOR, long multiply/divide/mod via library calls,
AND under OPT_ZNEED as a BTST) either shortcut out or massage
the operands so the generic "op <ea>,dn" tail can finish.
*/
static void
sop_dbinop(struct node *p, int op)
{
struct node *loc1, *loc2, *loc3, *locx;
register int mod, reg;
int shift;
TRACEPB("sop_dbinop", printf("(%p, %s)\n", p, ps_tok(op)));
/* note on non-abelians: division, e.g. is (loc1 / loc2); etc. */
loc1 = resolve(ss_pop());
loc2 = resolve(ss_pop());
/* special-case binops, which means many of them */
/* these cases either shortcut and exit, or else regularize the
op so the following code can handle it */
switch(op) {
case XOR_TOK:
/* EOR requires that the UNCHANGED operand be a dreg */
/* SHORTCUT */
/* get a place for the result */
if (reg = has_dtreg(loc1)) {
if (is_dtloc(loc1)) {
loc3 = loc1;
}
else {
loc3 = locn_reg(reg);
g_2l1(X_MOVE, loc1, loc3);
free_temp(loc1);
}
}
else {
/* repush so get_dtemp may spill them if it must */
ss_push(loc2);
ss_push(loc1);
loc3 = get_dtemp();
loc1 = ss_pop();
loc2 = ss_pop();
g_qmove(loc1, loc3);
free_temp(loc1);
/* NOTE: loc3 could be a sort-of tos_loc if no
dtemp is readily at hand */
}
/* put the unchanged operand into a dreg */
if (is_dloc(loc2)) {
locx = loc2;
}
else if (reg = has_dtreg(loc2)) {
locx = locn_reg(reg);
g_2l1(X_MOVE, loc2, locx);
}
else {
/* do NOT make loc3 available */
locx = get_dtemp();
g_qmove(loc2, locx);
}
loc3 -> n_cltype = p -> n_cltype;
g_2l2(X_EOR, locx, loc3);
ss_push(loc3);
free_temp(loc2);
RETURN_VOID("sop_dbinop");
case STAR_TOK:
mod = p -> n_cltype -> t_mclass;
if (mod & LONG_MOD) {
/* SHORTCUT */
g_2call(loc1, loc2,
(mod & UNSIGNED_MOD)? "lmulu" : "lmul");
loc3 = d0_loc;
loc3 -> n_cltype = p -> n_cltype;
ss_push(loc3);
RETURN_VOID("sop_dbinop");
}
else if (mod & CHAR_MOD) {
/* widen byte operands to word for the hardware multiply */
if (!is_cloc(loc2)) {
loc2 = x_cast(loc2, 1, mod, 1, EXCESSLENOK);
}
loc1 = x_cast(loc1, 1, mod, 1, FALSE);
break;
}
else {
break;
}
case DIV_TOK:
mod = p -> n_cltype -> t_mclass;
if (mod & LONG_MOD) {
/* SHORTCUT */
g_2call(loc1, loc2,
(mod & UNSIGNED_MOD)? "ldivu" : "ldiv");
loc3 = d0_loc;
loc3 -> n_cltype = p -> n_cltype;
ss_push(loc3);
RETURN_VOID("sop_dbinop");
}
goto dodiv;
case MOD_TOK:
mod = p -> n_cltype -> t_mclass;
if (mod & LONG_MOD) {
/* SHORTCUT */
g_2call(loc1, loc2,
(mod & UNSIGNED_MOD)? "lremu" : "lrem");
loc3 = d0_loc;
loc3 -> n_cltype = p -> n_cltype;
ss_push(loc3);
RETURN_VOID("sop_dbinop");
}
/* FALLTHROUGH */
dodiv:
/* DIVS/DIVU want a 32-bit dividend; widen loc1 accordingly */
if (mod & CHAR_MOD) {
if (!is_cloc(loc2)) {
loc2 = x_cast(loc2, 1, mod, 2, EXCESSLENOK);
}
ss_push(loc2);
loc1 = x_cast(loc1, 1, mod, 4, FALSE);
loc2 = ss_pop();
}
else {
ss_push(loc2);
loc1 = x_cast(loc1, 2, mod, 4, FALSE);
loc2 = ss_pop();
}
break;
case AND_TOK:
/* when only the Z flag is needed and one operand is a
power-of-two constant, a BTST suffices */
if (p -> n_oflags & OPT_ZNEED) {
if (
is_cloc(loc1) &&
(shift = x_shift(loc1 -> n_const)) >= 0 &&
!loc1 -> n_cid
) {
if (is_dloc(loc2)) {
}
else if (loc2 -> n_mode == EA_MODE) {
/* BTST on memory is byte-sized: offset the address */
loc2 = locn_xdupl(loc2);
loc2 -> n_const += (long)
aoffs(
(unsigned) mlen(loc2),
(unsigned *) &shift);
}
else {
break;
}
g_2(X_BTST, locn_xconst((long)shift), loc2);
p -> n_oflags |= OPT_ZFLAG;
ss_push(loc1);
free_temp(loc2);
RETURN_VOID("sop_dbinop");
}
else if (
is_cloc(loc2) &&
(shift = x_shift(loc2 -> n_const)) >= 0 &&
!loc2 -> n_cid
) {
if (is_dloc(loc1)) {
}
else if (loc1 -> n_mode == EA_MODE) {
loc1 = locn_xdupl(loc1);
loc1 -> n_const += (long)
aoffs(
(unsigned) mlen(loc1),
(unsigned *) &shift);
}
else {
break;
}
g_2(X_BTST, locn_xconst((long)shift), loc1);
p -> n_oflags |= OPT_ZFLAG;
ss_push(loc2);
free_temp(loc1);
RETURN_VOID("sop_dbinop");
}
}
break;
}
/* regular binary operators, meaning op <ea>, dn */
/* make the destination a dtemp if it isn't already */
if (reg = has_dtreg(loc1)) {
if (is_dtloc(loc1)) {
loc3 = loc1;
}
else {
loc3 = locn_reg(reg);
g_2l1(X_MOVE, loc1, loc3);
free_temp(loc1);
}
}
else if (is_abelian(op) && (reg = has_dtreg(loc2)) ) {
/* commutative: the right operand's dreg may serve instead */
if (is_dtloc(loc2)) {
loc3 = loc2;
loc2 = loc1;
}
else {
loc3 = locn_reg(reg);
g_2l1(X_MOVE, loc2, loc3);
free_temp(loc2);
loc2 = loc1;
}
}
else {
/*
Pushing loc2 makes it available, if necessary,
to get_dtemp.
*/
ss_push(loc2);
loc3 = get_dtemp();
loc2 = ss_pop();
g_qmove(loc1, loc3);
free_temp(loc1);
}
loc3 -> n_cltype = p -> n_cltype;
/* CAUTION: do not repush anything from abelian operators; operands
may be flipped, so that s_stack will become a mess */
/* CAUTION: in certain cases where loc2 is a constant, its type
is wrong here */
switch(op) {
case PLUS_TOK: g_2l2(X_ADD, loc2, loc3); break;
case MINUS_TOK: g_2l2(X_SUB, loc2, loc3); break;
case AND_TOK: g_2l2(X_AND, loc2, loc3); break;
case OR_TOK: g_2l2(X_OR, loc2, loc3); break;
case STAR_TOK:
g_2((mod & UNSIGNED_MOD)? X_MULU : X_MULS, loc2, loc3);
break;
case DIV_TOK:
g_2((mod & UNSIGNED_MOD)? X_DIVU : X_DIVS, loc2, loc3);
break;
case MOD_TOK:
/* DIVS/DIVU leave the remainder in the high word */
g_2((mod & UNSIGNED_MOD)? X_DIVU : X_DIVS, loc2, loc3);
g_1(X_SWAP, loc3);
break;
case LSHIFT_TOK:
/* shift count must be a dreg or a small (0..8) immediate */
if (is_dloc(loc2)) {
}
else if (is_cloc(loc2) && !loc2 -> n_cid &&
loc2 -> n_const >= 0 && loc2 -> n_const <= 8) {
}
else {
loc1 = get_dtemp(); /* requires TWO temps */
g_2l1(X_MOVE, loc2, loc1);
free_temp(loc2);
loc2 = loc1;
}
g_2l2(X_ASL, loc2, loc3);
break;
case RSHIFT_TOK:
if (is_dloc(loc2)) {
}
else if (is_cloc(loc2) && !loc2 -> n_cid &&
loc2 -> n_const >= 0 && loc2 -> n_const <= 8) {
}
else {
loc1 = get_dtemp(); /* requires TWO temps */
g_2l1(X_MOVE, loc2, loc1);
free_temp(loc2);
loc2 = loc1;
}
g_2((mod & UNSIGNED_MOD)? X_LSR : X_ASR, loc2, loc3);
break;
default:
g_error(NULL, "sop_dbinop: shouldn't happen");
printf("sop_dbinop: bad op is %d %s\n", op, ps_tok(op));
}
free_xtemp(loc2, loc3);
ss_push(loc3); /* no physical push */
TICKX("sop_dbinop");
}
/*
Generate code for a cast operator.
Compare the C type of the subtree with the C type of the root.
Code is generated for widening casts and to move objects that
can't be cast in place; e.g. a byte object can't be in an A
register.
CAUTION:
Narrowing casts are NOT to return dual-register value nodes,
since x_resolve(), x_sspush(), and so on assume that the first
entry in a dual node is long. See notes at head of x2.c.
*/
/*
Generate code for a cast operator.
Compare the C type of the subtree (t1, from the s_stack top) with
the C type of the root (t3, from p) and dispatch to one of four
actions: donothing (retype in place), extend (widen), shrink
(narrow), or makesame (same width but must leave an A reg).
Code is generated for widening casts and to move objects that
can't be cast in place; e.g. a byte object can't be in an A
register.
CAUTION:
Narrowing casts are NOT to return dual-register value nodes,
since x_resolve(), x_sspush(), and so on assume that the first
entry in a dual node is long. See notes at head of x2.c.
*/
static void
sop_cast(register struct node *p)
{
register struct node *loc1, *loc3;
register struct type_node *t1, *t3;
register int len1, len3;
register int ptr1, ptr3;
int imod1;
/* the first part selects what to do */
TRACEPB("sop_cast", printf("(%p)\n", p));
loc1 = ss_pop();
t1 = loc1 -> n_cltype;
t3 = p -> n_cltype;
imod1 = t1 -> t_mclass;
/* "pointer-ish" means anything that is not plain int-family */
ptr1 = !(t1 -> t_typtok == INT_TYPE);
ptr3 = !(t3 -> t_typtok == INT_TYPE);
TRACEP("sop_cast", pr_type(t1); pr_type(t3));
if (ptr3) {
if (t3 -> t_typtok == VOID_TYPE) {
/* (void) cast: the value is discarded */
zero_loc -> n_cltype = long_type;
ss_push(zero_loc);
RETURN_VOID("sop_cast");
}
if (ptr1) {
goto donothing;
}
else {
len1 = (int) t1 -> t_tsize;
if (len1 == 4) {
goto donothing;
}
len3 = 4;
goto extend;
}
}
else {
len3 = (int)(t3 -> t_tsize);
if (ptr1) {
len1 = 4;
/* BUG FIX: was "if (len3 = 4)", an assignment that made
every pointer-to-int cast a no-op; narrowing a pointer
to a short/char must take the shrink path */
if (len3 == 4) {
goto donothing;
}
else {
goto shrink;
}
}
len1 = (int)(t1 -> t_tsize);
if (len1 == len3) {
if (len1 >= 2) {
goto donothing;
}
else {
goto makesame;
}
}
else if (len1 > len3) {
goto shrink;
}
else {
goto extend;
}
}
/*
the following sections do what was selected
rules:
they are not to branch into each other except that extend
makes one further sub-selection of donothing.
excuse for so many goto's:
the other options only obfuscate the intent of the code;
a switch enumeration adds complexity without clarity;
the three implied functions have far too many parameters;
replication of the code would be ad absurdum and make it
almost impossible to make future alterations cleanly since
it would be difficult to tell where they would need to
be replicated.
*/
extend:
/*
some of this code is highly parallel to, but different from,
portions of x_scale
*/
loc1 = resolve(loc1);
if (is_cloc(loc1)) {
/* constants widen for free */
goto donothing;
}
if (is_atloc(loc1) &&
((imod1 & LONG_MOD) || !(imod1 & (CHAR_MOD | UNSIGNED_MOD)))
) {
/* anything long or signed in an atreg is 32 bits long already;
there's no other way to get it in there, since we
eschew unbalanced EXG manipulations */
goto donothing;
}
if (ptr3 && !ptr1 && (na_free || !nd_free) &&
!(imod1 & (UNSIGNED_MOD | CHAR_MOD))) {
/* SHORTCUT via move to A reg */
ss_push(loc1);
loc3 = get_atemp();
loc1 = ss_pop();
g_2l1(X_MOVE, loc1, loc3);
loc3 -> n_cltype = t3;
free_temp(loc1);
ss_push(loc3);
RETURN_VOID("sop_cast");
}
if (ptr3 && is_dtloc(loc1) && (imod1 & CHAR_MOD)) {
/* partial SHORTCUT via move to A reg */
loc3 = get_atemp();
loc1 = locn_xdupl(loc1);
loc1 -> n_cltype = int_type;
if (imod1 & UNSIGNED_MOD) {
g_2l2(X_AND, locn_xconst(255L), loc1);
}
else {
g_1l(X_EXT, loc1);
}
g_2l1(X_MOVE, loc1, loc3);
loc3 -> n_cltype = t3;
free_temp(loc1);
ss_push(loc3);
RETURN_VOID("sop_cast");
}
/* WARNING:
the potential to generate an ext.l on a D reg and
later move.l to an A reg still exists. Also, the
potential to do in-place an unsigned cast that would
better be done with a temporary exists. These may
be detectable with peephole.
*/
loc3 = x_cast(loc1, len1, imod1, len3, EXCESSLENOK);
loc3 -> n_cltype = t3;
free_xtemp(loc1, loc3);
ss_push(loc3);
RETURN_VOID("sop_cast");
shrink:
loc1 = resolve(loc1); /* eliminate dual-reg value nodes, etc */
/* see note at head of this function */
switch(loc1 -> n_mode) {
case EA_MODE:
/* this is a 68000, so offset the address */
/* this ought to always work, since if the large object is
on a proper boundary, so should be the small; the
converse, of course, is not necessarily so */
loc3 = locn_xdupl(loc1);
loc3 -> n_const += (long)(len1 - len3);
loc3 -> n_cltype = t3;
ss_push(loc3);
RETURN_VOID("sop_cast");
case EAPSI_MODE:
if (is_atloc(loc1)) {
g_error(p, "post increment has no effect");
}
/* FALLTHROUGH */
case EAPRD_MODE:
if (ptr3 && is_atreg(loc1 -> n_reg1)) {
/* this case is not terribly likely */
loc3 = locn_reg(loc1 -> n_reg1);
}
else {
loc3 = get_temp(p);
}
g_2l1(X_MOVE, loc1, loc3);
free_xtemp(loc1, loc3);
loc3 -> n_cltype = t3;
ss_push(loc3);
RETURN_VOID("sop_cast");
case VALUE_MODE:
if (is_aloc(loc1) && len3 < 2) {
/* there is no areg byte reference */
loc3 = get_dtemp();
loc3 -> n_cltype = int_type;
g_2l1(X_MOVE, loc1, loc3);
free_xtemp(loc1, loc3);
}
else {
loc3 = locn_xdupl(loc1);
}
loc3 -> n_cltype = t3;
ss_push(loc3);
RETURN_VOID("sop_cast");
}
fatal("sop_cast: internal: bad mode");
makesame:
/* move byte that is in areg */
loc1 = resolve(loc1);
if (is_aloc(loc1)) {
ss_push(loc1);
loc3 = get_dtemp();
loc1 = ss_pop();
loc3 -> n_cltype = int_type;
g_2l2(X_MOVE, loc1, loc3);
free_temp(loc1);
}
else {
loc3 = locn_xdupl(loc1);
}
loc3 -> n_cltype = t3;
ss_push(loc3);
RETURN_VOID("sop_cast");
donothing:
/* no code needed; just retype a duplicate of the loc_node */
loc3 = locn_xdupl(loc1);
loc3 -> n_cltype = t3;
ss_push(loc3);
RETURN_VOID("sop_cast");
}
/*
Generate code to test the top two elements of the S stack
and set the condition codes accordingly.
Return TRUE if the order of the compare was reversed from normal
unstacking order
*/
/*
Generate code to test the top two elements of the S stack
and set the condition codes accordingly.
p -> n_arg1 is the left operand, p -> n_arg2 the right; arg2 is
generated first so arg1 lands on top (standard unstacking order).
Return TRUE if the order of the compare was reversed from normal
unstacking order (the caller must then flip its condition).
*/
static bool
sop_cmp(struct node *p)
{
int exchange;
struct node *loc1, *loc2, *loc3;
/*
if loc1 is ANY register, or else
if loc2 is a constant then generate
cmp loc2, loc1
otherwise, generate:
move loc1, temp_reg
cmp loc2, temp_reg
this represents standard unstacking order. if it is
convenient and would not upset the stack, exchange
the operands instead
*/
TRACEPB("sop_cmp", printf("(%p)\n", p));
gen_e1(p -> n_arg2);
loc2 = resolve(ss_pop());
ss_push(loc2);
gen_e1(p -> n_arg1);
loc1 = resolve(ss_pop()); /* destination */
loc2 = ss_pop();
/* never exchange if loc1 already suits CMP, if loc2 is a
constant, or if both operands carry side effects */
if (is_xloc(loc1) || is_cloc(loc2) ||
(has_effect(loc1 -> n_mode) && has_effect(loc2 -> n_mode)) ) {
exchange = FALSE;
}
else {
loc3 = loc1;
loc1 = loc2;
loc2 = loc3;
/* not in unstacking order */
exchange = TRUE;
}
/* an A reg cannot be compared at byte/word length; use a dtemp */
if (is_aloc(loc1) && mlen(loc1) != 4) {
goto dtemp;
}
else if (is_cloc(loc2) && !is_cloc(loc1)) {
if (is_zloc(loc2) && !is_aloc(loc1)) {
/* compare against zero degenerates to TST */
g_1l(X_TST, loc1);
}
else {
g_2l1(X_CMP, loc2, loc1);
}
}
else if (is_xloc(loc1)) {
g_2l1(X_CMP, loc2, loc1);
}
else if (
loc2 -> n_mode == EAPSI_MODE &&
loc1 -> n_mode == EAPSI_MODE &&
loc2 -> n_reg1 != loc1 -> n_reg1
) {
/* I don't know for sure what happens if you try to
compare top of stack to next on stack with
CMPM (a7)+,(a7)+, hence the third test */
g_2l1(X_CMPM, loc2, loc1);
}
else {
dtemp:
ss_push(loc2); /* note: if both were pop, they're not
exchanged */
ss_push(loc1);
loc3 = get_dtemp(); /* NEVER atemp */
loc1 = ss_pop();
loc2 = ss_pop();
g_2l1(X_MOVE, loc1, loc3);
g_2l1(X_CMP, loc2, loc3);
free_reg(loc3 -> n_reg1);
}
/* This sets condition codes but returns no value. */
free_temp(loc2);
free_temp(loc1);
RETURN_BOOL("sop_cmp", exchange);
}
/*
Generate code for the comma operator.
This operator is a sort-of boolean whose left hand always
succeeds. Since there is no CONDITIONAL execution of anything,
however, physical-stack balancing between the branches is unnecessary.
It IS necessary to get rid of the left hand result thoroughly.
*/
/*
Generate code for the comma operator: evaluate the left-hand
subtree purely for its side effects, discard its result, then
evaluate the right-hand subtree and leave that value on the
s_stack as the value of the whole expression.
*/
static void
sop_comma(register struct node *p)
{
register struct node *lhs;
TRACEPB("sop_comma", printf("(%p)\n", p));
/* left operand: generate, unstack, and throw away */
gen_e1(p -> n_arg1);
lhs = ss_pop();
free_temp(lhs);
/* a pop_loc form must still be referenced so a7 gets adjusted;
x_lookat performs that reference */
x_lookat(lhs);
/* right operand supplies the result; it stays on the s_stack */
gen_e1(p -> n_arg2);
TICKX("sop_comma");
}
/*
Generate code for the list separator.
This is as for the comma operator, except the form of the
node is slightly different because it is a cons node instead
of a binop node.
*/
/*
Generate code for the list separator.
Semantically identical to the comma operator, but the node is a
cons node (n_car / n_next) rather than a binop node
(n_arg1 / n_arg2), so it gets its own routine.
*/
static void
sop_sep(register struct node *p)
{
register struct node *head;
TRACEPB("sop_sep", printf("(%p)\n", p));
/* first list element: generate, unstack, and discard */
gen_e1(p -> n_car);
head = ss_pop();
free_temp(head);
/* reference a possible pop_loc so a7 stays balanced */
x_lookat(head);
/* remainder of the list supplies the result */
gen_e1(p -> n_next);
TICKX("sop_sep");
}
/*
Generate code for the inner (0/1) logical operators.
This calls gen_b1 and inserts a load of 1 in one branch and 0
in the other branch. Do NOT confuse these two "branches", which
correspond to true_lab and false_lab, with the other branches
that execute or skip portions of what is written in the conditional
expression itself. There are substantial similarities between this
operation and the ternary, but the comparison can be more confusing
than helpful.
*/
/*
Generate code for the inner (0/1) logical operators.
This calls gen_b1 and inserts a load of 1 in one branch and 0
in the other branch. Do NOT confuse these two "branches", which
correspond to true_lab and false_lab, with the other branches
that execute or skip portions of what is written in the conditional
expression itself. There are substantial similarities between this
operation and the ternary, but the comparison can be more confusing
than helpful.
*/
static void
sop_logical(register struct node *p, int op)
{
register struct node *loc1;
struct node *else_lab, *end_lab;
TRACEPB("sop_logical", printf("(%p, %d)\n", p, op));
else_lab = new_clabel();
end_lab = new_clabel();
gen_b1(0, p, FALL_THROUGH, else_lab);
/* NOW get the temp; it should get the temp reserved by gen_b1;
if gen_b1 doesn't reserve a temp because there are no
stacked registers showing, then there should be an available reg */
if (!nd_free) {
/* the branch stacks won't match after the convergence point */
g_error(p, "internal: sop_logical: temp unavailable");
}
loc1 = get_dtemp();
loc1 -> n_cltype = int_type;
g_2l2(X_MOVE, one_loc, loc1);
/* FIX: branch to the end_lab allocated above; the old code
re-called new_clabel() here, leaking the first label */
g_bra(end_lab);
g_label(else_lab);
g_1l(X_CLR, loc1);
g_label(end_lab);
ss_push(loc1);
TICKX("sop_logical");
}
/*
Generate code for unary "not" operator.
*/
/*
Generate code for unary "not" operator.
Chains of ! are collapsed first: flip records the parity of the
stack of NOT nodes so only one test/Sxx pair is emitted.
If the operand is itself a logical && / ||, gen_b1 does the test
and a 1/0 is loaded in the appropriate branch; otherwise the
operand is evaluated, tested, and an SEQ/SNE captures the result.
*/
static void
sop_not(struct node *q)
{
register struct node *loc1, *loc3;
register struct node *p;
register struct node *else_lab, *end_lab;
register int flip, tok;
TRACEPB("sop_not", printf("(%p)\n", q));
flip = 0;
p = q;
/* collapse !!!...x to a single test plus a parity bit */
while ((tok = p -> n_arg1 -> n_type) == NOT_TOK) {
flip ^= 1;
p = p -> n_arg1;
}
if (tok == LOR_TOK || tok == LAND_TOK) {
else_lab = new_clabel();
end_lab = new_clabel();
gen_b1(0, p -> n_arg1, FALL_THROUGH, else_lab);
if (!nd_free) {
g_error(p, "internal: sop_not: temp unavailable");
}
loc1 = get_dtemp();
loc1 -> n_cltype = q -> n_cltype;
/* the fall-through branch is the operand-true case; !true is
0 unless an odd number of !'s flips it */
if (flip) {
g_2l2(X_MOVE, one_loc, loc1);
}
else {
g_1l(X_CLR, loc1);
}
/* FIX: branch to the end_lab allocated above; the old code
re-called new_clabel() here, leaking the first label */
g_bra(end_lab);
g_label(else_lab);
if (flip) {
g_1l(X_CLR, loc1);
}
else {
g_2l2(X_MOVE, one_loc, loc1);
}
g_label(end_lab);
ss_push(loc1);
}
else {
gen_e1(p -> n_arg1);
loc1 = ss_pop();
/* TST cannot take an A reg; use CMPA #0 instead */
if (is_aloc(loc1)) {
g_2l2(X_CMPA, zero_loc, loc1);
}
else {
g_1l(X_TST, loc1);
}
/* result is to wind up in a dtemp */
if (is_dtloc(loc1)) {
loc3 = loc1;
}
else {
loc3 = get_dtemp();
}
if (flip) {
g_1(X_SNE, loc3);
}
else {
g_1(X_SEQ, loc3);
}
loc3 -> n_cltype = q -> n_cltype;
/* Sxx sets the whole byte; mask down to 0/1 */
g_2l2(X_AND, one_loc, loc3);
ss_push(loc3);
}
TICKX("sop_not");
}
/*
Add (or subtract) two loc_node references.
NOTE: because of the s_stack, an argument must be fetched exactly
once in all branches
*/
/*
Add (or subtract) two loc_node references.
Pure int +/- is delegated to sop_dbinop; otherwise at least one
operand is a pointer: the integer operand is scaled by the
pointed-to size and x_addpsi combines them (or x_subp handles
pointer - pointer).
NOTE: because of the s_stack, an argument must be fetched exactly
once in all branches
*/
static void
sop_add(struct node *p, int op)
{
struct node *loci, *locp;
register unsigned long scale;
TRACEPB("sop_add", printf("(%p, %d)\n", p, op));
if (p -> n_arg1 -> n_cltype -> t_typtok == INT_TYPE &&
p -> n_arg2 -> n_cltype -> t_typtok == INT_TYPE) {
/* must be ordinary integer operation */
gen_e1(p -> n_arg2);
gen_e1(p -> n_arg1);
sop_dbinop(p, op);
RETURN_VOID("sop_add");
}
/* operation involves at least one pointer */
/* generate the pointer side first so the int lands on top */
if (p -> n_arg1 -> n_cltype -> t_typtok == INT_TYPE) {
locp = p -> n_arg2;
gen_e1(p -> n_arg2);
gen_e1(p -> n_arg1); /* generates the int last */
}
else {
locp = p -> n_arg1;
gen_e1(p -> n_arg1);
gen_e1(p -> n_arg2); /* generates the int last */
}
/* element size of the pointed-to type, from the expression node */
scale = locp -> n_cltype -> t_link -> t_tsize;
loci = ss_pop(); /* integer loc_node */
locp = ss_pop();
TRACEP("sop_add", printf("pointer/array %s: p=%p i=%p\n",
(op == MINUS_TOK)? "subtraction" : "addition",locp, loci));
if (loci -> n_cltype -> t_typtok != INT_TYPE) {
/* both operands are pointers: only subtraction is legal */
#ifdef DEBUG
if (op != MINUS_TOK) {
g_error(p, "pointer plus pointer?");
ss_push(locp);
RETURN_VOID("sop_add");
}
#endif /* DEBUG */
locp = x_subp(locp, loci, scale);
ss_push(locp);
RETURN_VOID("sop_add");
}
/* WARNING: check for postponement of scaling... */
loci = x_scale(loci, locp -> n_cltype -> t_link -> t_tsize);
locp = x_addpsi(locp, loci, op);
locp -> n_cltype = p -> n_cltype;
ss_push(locp);
TRACEPX("sop_add", printf("returns locp=%p\n", locp));
return;
}
/*
Generate code for a relational 0/1 operator.
*/
/*
Generate code for a relational 0/1 operator.
The two operands are the top two s_stack entries (loc1 on top);
a CMP is emitted (exchanging operands and flipping the relation
when that avoids an extra move), then an Sxx instruction captures
the condition into a D temporary, masked down to 0/1.
Signedness (or pointer-ness) of the operands selects between the
unsigned (SLS/SLO/SHS/SHI) and signed (SLE/SLT/SGE/SGT) forms.
*/
static void
sop_relop(struct node *p, int op)
{
register struct node *loc1, *loc2, *loc3;
register struct type_node *t;
TRACEPB("sop_relop", printf("(%p, %s)\n", p, ps_tok(op)));
loc1 = resolve(ss_pop());
loc2 = resolve(ss_pop());
/* Avoid an extra move if at all possible. */
if (is_xloc(loc2) || is_cloc(loc1)) {
/* All is well. */
loc3 = loc2;
}
else if (is_xloc(loc1) || is_cloc(loc2)) {
/* exchange the operands and the compare. */
/* Note: will work since they aren't both POP_LOC */
loc3 = loc1;
loc1 = loc2;
switch(op) {
/* symmetrical */
case NE_TOK:
case EQUAL_TOK: break;
case GE_TOK: op = LE_TOK; break;
case GT_TOK: op = LT_TOK; break;
case LE_TOK: op = GE_TOK; break;
case LT_TOK: op = GT_TOK; break;
}
}
else {
/* Push loc2 so that get_temp() may change it. */
ss_push(loc2);
loc3 = get_temp(loc2);
loc2 = ss_pop();
g_2l1(X_MOVE, loc2, loc3);
free_temp(loc2);
}
/* must NOT ss_push anything but final result now, since operands
may be switched */
/* Do the comparison. */
g_2l1(X_CMP, loc1, loc3);
t = loc1 -> n_cltype;
/* Free everything */
free_temp(loc1);
free_temp(loc3);
/* Get a D temporary for the result and make it an integer */
/* If this D temporary is a register just freed, so be it */
loc3 = get_dtemp();
loc3 -> n_cltype = int_type;
/* Generate an Sxx instruction. */
if (t -> t_typtok != INT_TYPE || (t -> t_mclass & UNSIGNED_MOD)) {
/* pointer/unsigned */
switch(op) {
case EQUAL_TOK: g_1(X_SEQ, loc3); break;
case NE_TOK: g_1(X_SNE, loc3); break;
/* exchanged forms since loc2 is generated first */
case GE_TOK: g_1(X_SLS, loc3); break;
case GT_TOK: g_1(X_SLO, loc3); break;
case LE_TOK: g_1(X_SHS, loc3); break;
case LT_TOK: g_1(X_SHI, loc3); break;
}
}
else {
/* signed */
switch(op) {
case EQUAL_TOK: g_1(X_SEQ, loc3); break;
case NE_TOK: g_1(X_SNE, loc3); break;
case GE_TOK: g_1(X_SLE, loc3); break;
case GT_TOK: g_1(X_SLT, loc3); break;
case LE_TOK: g_1(X_SGE, loc3); break;
case LT_TOK: g_1(X_SGT, loc3); break;
}
}
/* Mask off all but the last bit. */
/* This result is DEFINED as int, so it has to be extended */
g_2l2(X_AND, one_loc, loc3);
ss_push(loc3);
RETURN_VOID("sop_relop");
}
/*
Generate code for ternary operator.
Note that this involves machinations to get the stack balanced
the same regardless of which branch gets executed!
CAUTION: has_effect() loc_nodes involve postponed side effects.
This code always loads both branches into a temporary. If that
eventually gets optimized away, keep in mind that has_effect()
loc_nodes MUST be loaded at the end of the branch to get the
side effects done, since ?: is a sequence operator.
*/
/*
Generate code for ternary operator.
Note that this involves machinations to get the stack balanced
the same regardless of which branch gets executed!
A result temporary (loc3 / reg3) is reserved up front, both
branches g_qmove their value into it, and the s_stack/r_stack are
restored to the pre-branch snapshot (ssb_save) after each branch.
CAUTION: has_effect() loc_nodes involve postponed side effects.
This code always loads both branches into a temporary. If that
eventually gets optimized away, keep in mind that has_effect()
loc_nodes MUST be loaded at the end of the branch to get the
side effects done, since ?: is a sequence operator.
*/
static void
sop_ternop(register struct node *p)
{
struct node *else_lab, *end_lab;
register struct node *loc, *loc1, *loc2, *loc3;
register int reg3, t_ok, is_t1, is_t2;
int ssb_save, sf_save;
/* Allocate label nodes. */
TRACEPB("sop_ternop", printf("(%p)\n", p));
else_lab = new_clabel();
end_lab = new_clabel();
/*
Identify a result temporary. This identifies one
register which will not get clobbered when we do the
balancing ss_restore() pop, since its push, if any,
occurs right now before we log the ss_bot location.
This register may in fact be used by one or both branches
as a temporary, but it will be released by the end
of each branch--or perhaps be the result itself.
If ss_restore() actually pops something, then this
register will be used as the result, since it is
guaranteed not to be clobbered. (This practice is
excessively restrictive, but it will do for quite a
while, since deep-lying ternaries aren't terribly
common anyhow.)
*/
loc3 = get_temp(p); /* this will be our result */
free_reg(reg3 = loc3 -> n_reg1);
t_ok = TRUE;
/* Generate the effective boolean */
/* p -> n_arg1 is the TEST expression, not a branch */
gen_b1(0, p -> n_arg1, FALL_THROUGH, else_lab);
/* NOW record the s_stack pointers */
ssb_save = ss_bot;
sf_save = ss_forks;
ss_forks = ss_count; /* pre-existing items save-and-restore */
/* p -> n_arg2 is the FIRST BRANCH */
gen_e1(p -> n_arg2);
loc1 = resolve(ss_pop());
g_qmove(loc1, loc3);
is_t1 = is_xtloc(loc1);
free_temp(loc1);
/* restore the s_stack and r_stack */
while (ss_bot > ssb_save) {
if (ss_restore()) {
/* something was popped back: the reserved reg3 must stand */
t_ok = FALSE;
}
}
g_bra(end_lab);
/* p -> n_arg3 is the SECOND branch */
g_label(else_lab);
gen_e1(p -> n_arg3);
loc2 = resolve(ss_pop());
g_qmove(loc2, loc3);
is_t2 = is_xtloc(loc2);
free_temp(loc2);
g_label(end_lab);
/* again restore the s_stack and r_stack */
while (ss_bot > ssb_save) {
if (ss_restore()) {
t_ok = FALSE;
}
}
/* if a branch returns a temporary of the right sort, use it */
/* (loc1/loc2 were freed above but only their n_reg1 is read) */
if (!t_ok) {
}
else if (is_areg(reg3)) {
if (is_t1 && is_areg(loc1 -> n_reg1)) {
loc3 -> n_reg1 = loc1 -> n_reg1;
}
else if (is_t2 && is_areg(loc2 -> n_reg1)) {
loc3 -> n_reg1 = loc2 -> n_reg1;
}
}
else {
/* dreg, since it came from get_temp */
if (is_t1 && is_dreg(loc1 -> n_reg1)) {
loc3 -> n_reg1 = loc1 -> n_reg1;
}
else if (is_t2 && is_dreg(loc2 -> n_reg1)) {
loc3 -> n_reg1 = loc2 -> n_reg1;
}
}
TRACE("gappend",
if(reg3 != loc3 -> n_reg1) {
printf("altering ternary temp reg to: ");
pr_arg(loc3);
printf("\n");
}
);
TRACEP("sop_ternop",
if(reg3 != loc3 -> n_reg1) {
printf("altering ternary temp reg to: ");
pr_loc(loc3);
printf("\n");
}
);
/* restore the forks threshold */
ss_forks = sf_save;
loc3 -> n_cltype = p -> n_cltype;
(void) alloc_reg(loc3 -> n_reg1);
ss_push(loc3);
TICKX("sop_ternop");
}
/*
Add a constant offset to the loc_node to produce a new one
*/
/*
Generate code for the "." member operator: add a constant member
offset (loci, top-of-stack constant) to the structure loc_node
(locp) to produce a new loc_node of the member's type.
The structure must be an lvalue (EA_MODE) and the offset must
fold to a constant; anything else is reported as an error.
*/
static void
sop_dot(struct node *p)
{
struct node *locp, *loci;
TRACEPB("sop_dot", printf("(%p)\n", p));
locp = ss_pop();
loci = ss_pop();
if (loci = fix_cloc(loci)) {
if (locp -> n_mode == EA_MODE) {
if (!is_zloc(loci)) {
/* fold the offset into the address computation, toggling
to VALUE_MODE so x_addpsi works on the address itself */
locp = locn_xdupl(locp);
locp -> n_mode = VALUE_MODE;
loci = x_scale(loci, 1L);
locp = x_addpsi(locp, loci, PLUS_TOK);
locp -> n_mode = EA_MODE;
}
/* a zero offset needs no code at all */
}
else {
g_error(p, "sop_dot: structure not lvalue");
}
}
else {
g_error(p, "sop_dot: not constant");
}
locp -> n_cltype = p -> n_cltype;
ss_push(locp);
TICKX("sop_dot");
}
/*
Add a constant offset to the pointer and perform indirection
Before even THINKING about optimizing (p++) -> elt, read note
in sop_ustar.
*/
/*
Generate code for the "->" member operator: add a constant member
offset (loci) to the pointer loc_node (locp) and then perform the
indirection via gen_indir.
Before even THINKING about optimizing (p++) -> elt, read note
in sop_ustar.
*/
static void
sop_arrow(struct node *p)
{
struct node *locp, *loci;
TRACEPB("sop_arrow", printf("(%p)\n", p));
locp = ss_pop();
loci = ss_pop();
if (loci = fix_cloc(loci)) {
/* a zero offset needs no address arithmetic */
if (!is_zloc(loci)) {
loci = x_scale(loci, 1L);
locp = x_addpsi(locp, loci, PLUS_TOK);
}
}
else {
g_error(p, "sop_arrow: not constant");
}
ss_push(locp);
gen_indir(p); /* argument is for type only */
TICKX("sop_arrow");
}
/*
Generate code for the unary "address-of" operator
and in other cases where an address is required.
*/
/*
Generate code for the unary "address-of" operator
and in other cases where an address is required.
Only forms that denote storage (indirections, member accesses,
identifiers) have addresses; a cast of such a form is peeled and
the address adjusted by the length difference when the cast
narrowed the object.  Errors push a constant 0 so the s_stack
stays balanced.
*/
static void
sop_adr(struct node *p)
{
struct node *loc1, *loc3;
struct node *p1;
register int x;
TRACEPB("sop_adr", printf("(%p)\n", p));
p1 = p -> n_arg1;
switch (p1 -> n_type) {
case USTAR_TOK:
case ARROW_TOK:
case DOT_TOK:
case ID_TOK:
gen_e1(p1);
loc3 = ss_pop();
if (loc3 -> n_mode == EA_MODE) {
/* an lvalue: its address is the VALUE_MODE form */
loc3 = locn_chmod(loc3, VALUE_MODE);
}
else if (loc3 -> n_mode == VALUE_MODE &&
loc3 -> n_cltype -> t_typtok == ARRAY_TYPE) {
/* an array already denotes its own address */
}
else {
g_error(p, "argument doesn't have an address");
loc3 = locn_xconst(0L);
}
break;
case CAST_TOK:
/* &(cast)x : take &x, then adjust for a narrowing cast */
sop_adr(p1);
loc3 = ss_pop();
switch(loc3 -> n_cltype -> t_typtok) {
case INT_TYPE:
case POINTER_TYPE:
x = mlen(p1 -> n_arg1) - mlen(p1);
if (x < 0) {
goto derror;
}
if (x > 0) {
/* 68000 is big-endian: the narrow object lives at the
high end, so offset the address by the difference */
g_help(p, "are you sure of what address you are asking for?");
loc1 = locn_xconst((long)x);
loc1 -> n_cltype = int_type;
loc3 = x_addpsi(loc3, loc1, PLUS_TOK);
}
break;
default:
derror:
g_error(p,
"cannot take address; highly dubious construction");
loc3 = locn_xconst(0L);
}
break;
default:
TRACEP("sop_adr", printf("p1->n_type = %s\n",
ps_tok(p1->n_type)));
g_error(p, "this form could not possibly have an address");
loc3 = locn_xconst(0L);
}
loc3 -> n_cltype = p -> n_cltype;
ss_push(loc3);
RETURN_VOID("sop_adr");
}
/*
Generate code for ordinary unary operators.
TILDE_TOK: ~ 1's complement.
UMINUS_TOK: - 2's complement.
UPLUS_TOK: + do nothing.
*/
/*
Generate code for ordinary unary operators.
TILDE_TOK: ~ 1's complement.
UMINUS_TOK: - 2's complement.
UPLUS_TOK: + do nothing.
The operand is the s_stack top; the result is left in a D
temporary (reusing the operand's dtemp when it has one).
*/
static void
sop_unop(register struct node *p, int op)
{
struct node *loc1, *loc3;
TRACEPB("sop_unop", printf("(%p, %s)\n", p, ps_tok(op)));
if (op == UPLUS_TOK) {
/* CAUTION: UPLUS is a meaningful ANSI operator */
RETURN_VOID("sop_unop");
}
loc1 = ss_pop();
/* result is to wind up in a dtemp */
if (is_dtloc(loc1)) {
loc3 = loc1;
}
else {
loc3 = get_dtemp();
g_qmove(loc1, loc3);
/* NOTE(review): loc1 is not freed on this path; sop_not shows
the same pattern — confirm this is intended */
}
switch (op) {
case TILDE_TOK:
loc3 -> n_cltype = p -> n_cltype;
g_1l(X_NOT, loc3);
break;
case UMINUS_TOK:
loc3 -> n_cltype = p -> n_cltype;
g_1l(X_NEG, loc3);
break;
default:
g_error(p, "sop_unop: shouldn't happen");
printf("sop_unop: bad op is %s\n", ps_tok(op));
}
ss_push(loc3);
TICKX("sop_unop");
}
/*
called by sop_ustar: NOT a special form
increment or decrement
*/
/*
called by sop_ustar: NOT a special form
increment or decrement

Handles PRE_INC/PRE_DEC/POST_INC/POST_DEC of the location on top of
the stack.  Pointer operands are stepped by the size of the pointed-to
type (the "scale"); int operands by 1.  Pre-ops push the updated
operand location; post-ops push a temporary holding the old value
unless OPT_NONEED says the value is discarded.

BUG FIX: the n_type switch formerly had no default, so an unexpected
token fell into dopre with tok uninitialized (undefined behavior).
A defensive default now reports the problem and recovers.
*/
static void
sop_ppop(struct node *p)
{
	register struct node *loc1, *loc3;
	register struct type_node *t1;
	register unsigned long scale;
	int tok;

	TRACEPB("sop_ppop", printf("(%p)\n", p));
	loc1 = resolve(ss_pop());
	t1 = loc1 -> n_cltype;

	/* select ADD vs. SUB and pre vs. post from the operator token */
	switch(p -> n_type) {
	case PRE_DEC_TOK:
		tok = X_SUB;
		goto dopre;
	case PRE_INC_TOK:
		tok = X_ADD;
		goto dopre;
	case POST_DEC_TOK:
		tok = X_SUB;
		goto dopost;
	case POST_INC_TOK:
		tok = X_ADD;
		goto dopost;
	default:
		/* shouldn't happen: recover with a defined value
		   rather than using tok uninitialized */
		g_error(p, "sop_ppop: shouldn't happen");
		tok = X_ADD;
		goto dopre;
	}

dopre:
	switch(t1 -> t_typtok) {
	case POINTER_TYPE:
		/* yes, sooner or later we have to do the scale */
		scale = t1 -> t_link -> t_tsize;
		if (scale != 1) {
			g_2l2(tok, locn_xconst(scale), loc1);
		}
		else {
			g_2l2(tok, one_loc, loc1);
		}
		break;
	case INT_TYPE:
		g_2l2(tok, one_loc, loc1);
		break;
	default:
		if (tok == X_ADD) {
			g_error(p, "increment of non-scalar");
		}
		else {
			g_error(p, "decrement of non-scalar");
		}
	}
	/* pre-op result is the updated operand itself */
	ss_push(loc1);
	RETURN_VOID("sop_ppop");
	/* pre-exit */

	/* PLEASE no fallthrough here */
dopost:
	if (p -> n_oflags & OPT_NONEED) {
		/* value is discarded: feed it a fish to keep stacks balanced */
		loc3 = loc1;
	}
	else {
		/* draw a temporary to hold the old value */
		loc3 = get_temp(p);
		loc3 -> n_cltype = loc1 -> n_cltype;
		g_2l1(X_MOVE, loc1, loc3);
	}
	switch(t1 -> t_typtok) {
	case POINTER_TYPE:
		/* yes, sooner or later we have to do the scale */
		scale = t1 -> t_link -> t_tsize;
		if (scale != 1) {
			g_2l2(tok, locn_xconst(scale), loc1);
			break;
		}
		/* FALL_THROUGH: scale of 1 uses the shared one_loc path */
	case INT_TYPE:
		g_2l2(tok, one_loc, loc1);
		break;
	default:
		if (tok == X_ADD) {
			g_error(p, "increment of non-scalar");
		}
		else {
			g_error(p, "decrement of non-scalar");
		}
	}
	/* post-op result is the saved old value */
	ss_push(loc3);
	RETURN_VOID("sop_ppop");
	/* post-exit */
}
/*
Generate code for function calls.
*/
/*
Generate code for a function call node p.

Chooses the location that will represent the returned value
(D0_ONLY builds always use d0; otherwise d0 for int results, a0
for everything else), saves scratch registers, generates and pushes
the argument list, emits the JSR, pops the arguments off the stack,
and pushes the result location.

BUG FIX: the TRACEPB entry was formerly emitted only under
#ifdef D0_ONLY, so other builds ran the TICKX exit with no matching
trace entry; TRACEPB is now unconditional.  The unused local
`register int reg` has also been removed.
*/
static void
sop_call(struct node *p)
{
	struct node *loc1, *loct;
	register unsigned long size;

	TRACEPB("sop_call", printf("(%p)\n", p));

	/* Set up the loc_node representing the returned value */
#ifdef D0_ONLY
	loc1 = d0_loc;
#else
	if (p -> n_cltype -> t_typtok == INT_TYPE) {
		loc1 = d0_loc;
	}
	else {
		loc1 = a0_loc;
	}
#endif /* D0_ONLY */

	/*
	push the scratch registers and return scratch register NOW
	the items pushed cannot be part of the args, since the args
	aren't even generated yet; all the items are items that
	must survive the function call.
	*/
	force_free(loc1 -> n_reg1);
	push_scratch();

	/* THEN generate the argument list */
	size = gen_args(p -> n_arg2, TRUE);

	/* Expression serving as function code locator */
	gen_e1(p -> n_arg1);
	loct = resolve(ss_pop());
	g_1(X_JSR, loct);
	free_temp(loct);

	/* Reset the stack pointer past the pushed arguments */
	if (size) {
		loct = locn_xconst(size);
		loct -> n_cltype = (size > INT_MAX)? long_type : int_type;
		g_2l1(X_ADDA, loct, a7_loc);
	}
	/*
	NOTE:
	in some cases, ensuing code might follow up the stack
	adjustment with a stack pre-decrement.  peep_hole()
	might fix this, or a global pointer could be kept to
	the most recent adjustment and then either used or
	cancelled at the next branch, rts, or stack pointer change.
	*/

	/* allocate and return the result */
	(void) alloc_reg(loc1 -> n_reg1);
	loc1 -> n_cltype = p -> n_cltype;
	ss_push(loc1);
	TICKX("sop_call");
}
/*
Push a series of call arguments and return a value to be used
in the compensating addq instruction
*/
/*
Push a series of call arguments and return a value to be used
in the compensating addq instruction.

arg: the argument expression tree (SEPARATOR_TOK nodes chain
     multiple arguments; arguments are generated right-to-left).
top: TRUE only for the outermost call from sop_call; enables the
     single-argument no-push optimization (nopush_loc) when the
     stack-slot bookkeeping permits it.

Static state: argdepth tracks recursion depth so a7busy (the
"argument already parked in the stack slot" flag) can be cleared
exactly once, when the outermost invocation unwinds.

BUG FIX: the empty-argument branch formerly returned the pointer
constant NULL where an unsigned long byte count is expected; it now
returns 0UL.
*/
static unsigned long
gen_args(register struct node *arg, int top)
{
	register unsigned long x;
	register struct node *loc1, *loc3;
	static unsigned int argdepth = 0;
	static unsigned int a7busy = 0;

	TRACEPB("gen_args", printf("(%p, %d)\n", arg, top));
	if (arg) {
		++argdepth;
		if (arg -> /* BUG t_typtok */ n_type == SEPARATOR_TOK) {
			/* later arguments are pushed first */
			x = gen_args(arg -> n_next, top);
			x += gen_args(arg -> n_car, FALSE);
		}
		else {
			gen_e1(arg);
			loc1 = resolve(ss_pop());
			x = (unsigned long) (unsigned) mlen(loc1);
			if (x == 1) {
				/* char argument: widen to int using a dreg */
				if (!is_cloc(loc1)) {
					loc1 =
					x_cast(loc1, 1, loc1->n_cltype->t_mclass, 2,
						TRUE /* 3/5/89 */);
				}
				loc1 = locn_xdupl(loc1);
				loc1 -> n_cltype -> t_mclass &= ~CHAR_MOD;
				loc1 -> n_cltype -> t_tsize = 2;
				x = 2;
			}
			TRACEP("gen_args",
				printf("push %p; top=%d; ss_bot=%d\n",
					loc1, top, ss_bot));
			/* note: we can skip pushing at the top level only */
			if (!is_equiv(loc1, pop_loc)) {
				if (top && call_1arg && !ss_bot && !a7busy) {
					/* single argument: store into the
					   reserved slot instead of pushing */
					g_2l1(X_MOVE, loc1, nopush_loc);
					a7busy = 1;
					x = 0;
				}
				/* note: the assembler apparently can't
				   generate a 4-byte PEA */
				else if (is_cloc(loc1) &&
					mlen(loc1) == 4 &&
					!loc1 -> n_cid &&
					loc1 -> n_const &&
					loc1 -> n_const <= 32767 &&
					loc1 -> n_const >= -32768) {
					/* small long constant: PEA its
					   absolute-word form */
					loc1 = locn_xdupl(loc1);
					loc1 -> n_mode = EA_MODE;
					loc1 -> n_scflag = X2_ABSW;
					loc1 -> n_cltype = int_type;
					g_1(X_PEA, loc1);
				}
				else {
					g_2l1(X_MOVE, loc1, push_loc);
				}
				free_temp(loc1);
			}
		}
		--argdepth;
		if (argdepth == 0) {
			/* outermost invocation is unwinding:
			   release the single-argument slot */
			a7busy = 0;
		}
		RETURN_ULONG("gen_args", x);
	}
	else {
		/* no arguments: nothing pushed, nothing to pop.
		   BUG FIX: was RETURN_ULONG("gen_args", NULL) */
		RETURN_ULONG("gen_args", 0UL);
	}
}